Explaining Text Classification
# Entry-point check: confirms the feature-attribution explainer plugin is importable.
from explainer.explainers import feature_attributions_explainer, metrics_explainer
feature_attributions_explainer.entry_points
True
# Load the 20 Newsgroups corpus, restricted to 5 of the 20 categories,
# as raw text plus integer-encoded labels.
import numpy as np
from sklearn import datasets
# Full list of the 20 newsgroup categories (kept for reference only).
all_categories = ['alt.atheism','comp.graphics','comp.os.ms-windows.misc','comp.sys.ibm.pc.hardware',
'comp.sys.mac.hardware','comp.windows.x', 'misc.forsale','rec.autos','rec.motorcycles',
'rec.sport.baseball','rec.sport.hockey','sci.crypt','sci.electronics','sci.med',
'sci.space','soc.religion.christian','talk.politics.guns','talk.politics.mideast',
'talk.politics.misc','talk.religion.misc']
# The 5 categories actually used for this classification task.
selected_categories = ['alt.atheism','comp.graphics','rec.motorcycles','sci.space','talk.politics.misc']
X_train_text, Y_train = datasets.fetch_20newsgroups(subset="train", categories=selected_categories, return_X_y=True)
X_test_text , Y_test = datasets.fetch_20newsgroups(subset="test", categories=selected_categories, return_X_y=True)
X_train_text = np.array(X_train_text)
X_test_text = np.array(X_test_text)
# Integer class ids (0..4) and their mapping back to category names.
classes = np.unique(Y_train)
mapping = dict(zip(classes, selected_categories))
len(X_train_text), len(X_test_text), classes, mapping
(2720,
1810,
array([0, 1, 2, 3, 4]),
{0: 'alt.atheism',
1: 'comp.graphics',
2: 'rec.motorcycles',
3: 'sci.space',
4: 'talk.politics.misc'})
print(Y_test)
[2 3 0 ... 3 2 3]
Vectorize Text Data
# Vectorize the raw text with TF-IDF (vocabulary capped at 50k terms).
import sklearn
import numpy as np
from sklearn.feature_extraction.text import CountVectorizer, TfidfVectorizer
vectorizer = TfidfVectorizer(max_features=50000)
# NOTE(review): fitting the vectorizer on train+test leaks test-set
# vocabulary/IDF statistics into the features; acceptable for a demo,
# not for a rigorous evaluation.
vectorizer.fit(np.concatenate((X_train_text, X_test_text)))
X_train = vectorizer.transform(X_train_text)
X_test = vectorizer.transform(X_test_text)
# Densify the sparse matrices for the Keras model.
X_train, X_test = X_train.toarray(), X_test.toarray()
X_train.shape, X_test.shape
((2720, 50000), (1810, 50000))
Define the Model
from tensorflow.keras.models import Sequential
from tensorflow.keras import layers
def create_model():
    """Build a small feed-forward softmax classifier over the TF-IDF features.

    Input width comes from the vectorized training matrix; the output layer
    has one unit per class.
    """
    net = Sequential()
    net.add(layers.Input(shape=X_train.shape[1:]))
    net.add(layers.Dense(128, activation="relu"))
    net.add(layers.Dense(64, activation="relu"))
    net.add(layers.Dense(len(classes), activation="softmax"))
    return net
# Instantiate the network and print its layer/parameter summary.
model = create_model()
model.summary()
Model: "sequential"
_________________________________________________________________
Layer (type) Output Shape Param #
=================================================================
dense (Dense) (None, 128) 6400128
dense_1 (Dense) (None, 64) 8256
dense_2 (Dense) (None, 5) 325
=================================================================
Total params: 6,408,709
Trainable params: 6,408,709
Non-trainable params: 0
_________________________________________________________________
2022-10-18 19:55:27.146965: I tensorflow/core/platform/cpu_feature_guard.cc:193] This TensorFlow binary is optimized with oneAPI Deep Neural Network Library (oneDNN) to use the following CPU instructions in performance-critical operations: AVX2 AVX512F AVX512_VNNI AVX512_BF16 AVX_VNNI AMX_TILE AMX_INT8 FMA
To enable them in other operations, rebuild TensorFlow with the appropriate compiler flags.
User settings:
KMP_AFFINITY=granularity=fine,verbose,compact,1,0
KMP_BLOCKTIME=1
KMP_DUPLICATE_LIB_OK=True
KMP_INIT_AT_FORK=FALSE
KMP_SETTINGS=1
KMP_WARNINGS=0
Effective settings:
KMP_ABORT_DELAY=0
KMP_ADAPTIVE_LOCK_PROPS='1,1024'
KMP_ALIGN_ALLOC=64
KMP_ALL_THREADPRIVATE=768
KMP_ATOMIC_MODE=2
KMP_BLOCKTIME=1
KMP_CPUINFO_FILE: value is not defined
KMP_DETERMINISTIC_REDUCTION=false
KMP_DEVICE_THREAD_LIMIT=2147483647
KMP_DISP_NUM_BUFFERS=7
KMP_DUPLICATE_LIB_OK=true
KMP_ENABLE_TASK_THROTTLING=true
KMP_FORCE_REDUCTION: value is not defined
KMP_FOREIGN_THREADS_THREADPRIVATE=true
KMP_FORKJOIN_BARRIER='2,2'
KMP_FORKJOIN_BARRIER_PATTERN='hyper,hyper'
KMP_GTID_MODE=3
KMP_HANDLE_SIGNALS=false
KMP_HOT_TEAMS_MAX_LEVEL=1
KMP_HOT_TEAMS_MODE=0
KMP_INIT_AT_FORK=true
KMP_LIBRARY=throughput
KMP_LOCK_KIND=queuing
KMP_MALLOC_POOL_INCR=1M
KMP_NUM_LOCKS_IN_BLOCK=1
KMP_PLAIN_BARRIER='2,2'
KMP_PLAIN_BARRIER_PATTERN='hyper,hyper'
KMP_REDUCTION_BARRIER='1,1'
KMP_REDUCTION_BARRIER_PATTERN='hyper,hyper'
KMP_SCHEDULE='static,balanced;guided,iterative'
KMP_SETTINGS=true
KMP_SPIN_BACKOFF_PARAMS='4096,100'
KMP_STACKOFFSET=64
KMP_STACKPAD=0
KMP_STACKSIZE=8M
KMP_STORAGE_MAP=false
KMP_TASKING=2
KMP_TASKLOOP_MIN_TASKS=0
KMP_TASK_STEALING_CONSTRAINT=1
KMP_TEAMS_THREAD_LIMIT=192
KMP_TOPOLOGY_METHOD=all
KMP_USE_YIELD=1
KMP_VERSION=false
KMP_WARNINGS=false
OMP_AFFINITY_FORMAT='OMP: pid %P tid %i thread %n bound to OS proc set {%A}'
OMP_ALLOCATOR=omp_default_mem_alloc
OMP_CANCELLATION=false
OMP_DEFAULT_DEVICE=0
OMP_DISPLAY_AFFINITY=false
OMP_DISPLAY_ENV=false
OMP_DYNAMIC=false
OMP_MAX_ACTIVE_LEVELS=1
OMP_MAX_TASK_PRIORITY=0
OMP_NESTED: deprecated; max-active-levels-var=1
OMP_NUM_THREADS: value is not defined
OMP_PLACES: value is not defined
OMP_PROC_BIND='intel'
OMP_SCHEDULE='static'
OMP_STACKSIZE=8M
OMP_TARGET_OFFLOAD=DEFAULT
OMP_THREAD_LIMIT=2147483647
OMP_WAIT_POLICY=PASSIVE
KMP_AFFINITY='verbose,warnings,respect,granularity=fine,compact,1,0'
2022-10-18 19:55:27.191465: I tensorflow/core/common_runtime/process_util.cc:146] Creating new thread pool with default inter op setting:
Compile and Train Model
# Integer labels, so sparse categorical cross-entropy; validate on the test split.
model.compile("adam", "sparse_categorical_crossentropy", metrics=["accuracy"])
history = model.fit(X_train, Y_train, batch_size=256, epochs=5, validation_data=(X_test, Y_test))
Epoch 1/5
1/11 [=>............................] - ETA: 25s - loss: 1.6100 - accuracy: 0.1758
2/11 [====>.........................] - ETA: 0s - loss: 1.6076 - accuracy: 0.3184
3/11 [=======>......................] - ETA: 0s - loss: 1.6045 - accuracy: 0.4232
4/11 [=========>....................] - ETA: 0s - loss: 1.5999 - accuracy: 0.5117
5/11 [============>.................] - ETA: 0s - loss: 1.5946 - accuracy: 0.5758
6/11 [===============>..............] - ETA: 0s - loss: 1.5880 - accuracy: 0.6263
7/11 [==================>...........] - ETA: 0s - loss: 1.5804 - accuracy: 0.6618
8/11 [====================>.........] - ETA: 0s - loss: 1.5726 - accuracy: 0.6865
9/11 [=======================>......] - ETA: 0s - loss: 1.5647 - accuracy: 0.7096
10/11 [==========================>...] - ETA: 0s - loss: 1.5566 - accuracy: 0.7293
11/11 [==============================] - ETA: 0s - loss: 1.5507 - accuracy: 0.7408
11/11 [==============================] - 4s 165ms/step - loss: 1.5507 - accuracy: 0.7408 - val_loss: 1.4508 - val_accuracy: 0.8718
Epoch 2/5
1/11 [=>............................] - ETA: 0s - loss: 1.3868 - accuracy: 0.9961
2/11 [====>.........................] - ETA: 1s - loss: 1.3720 - accuracy: 0.9922
3/11 [=======>......................] - ETA: 0s - loss: 1.3599 - accuracy: 0.9909
4/11 [=========>....................] - ETA: 0s - loss: 1.3409 - accuracy: 0.9902
5/11 [============>.................] - ETA: 0s - loss: 1.3256 - accuracy: 0.9906
7/11 [==================>...........] - ETA: 0s - loss: 1.2934 - accuracy: 0.9888
8/11 [====================>.........] - ETA: 0s - loss: 1.2771 - accuracy: 0.9873
9/11 [=======================>......] - ETA: 0s - loss: 1.2572 - accuracy: 0.9874
10/11 [==========================>...] - ETA: 0s - loss: 1.2411 - accuracy: 0.9879
11/11 [==============================] - ETA: 0s - loss: 1.2295 - accuracy: 0.9875
11/11 [==============================] - 1s 103ms/step - loss: 1.2295 - accuracy: 0.9875 - val_loss: 1.1457 - val_accuracy: 0.8978
Epoch 3/5
1/11 [=>............................] - ETA: 0s - loss: 0.9487 - accuracy: 0.9961
2/11 [====>.........................] - ETA: 0s - loss: 0.9241 - accuracy: 0.9980
3/11 [=======>......................] - ETA: 0s - loss: 0.9084 - accuracy: 0.9948
4/11 [=========>....................] - ETA: 0s - loss: 0.8853 - accuracy: 0.9951
5/11 [============>.................] - ETA: 0s - loss: 0.8787 - accuracy: 0.9961
7/11 [==================>...........] - ETA: 0s - loss: 0.8384 - accuracy: 0.9967
9/11 [=======================>......] - ETA: 0s - loss: 0.7971 - accuracy: 0.9965
11/11 [==============================] - ETA: 0s - loss: 0.7677 - accuracy: 0.9971
11/11 [==============================] - 1s 109ms/step - loss: 0.7677 - accuracy: 0.9971 - val_loss: 0.7861 - val_accuracy: 0.9260
Epoch 4/5
1/11 [=>............................] - ETA: 1s - loss: 0.4893 - accuracy: 1.0000
2/11 [====>.........................] - ETA: 0s - loss: 0.4876 - accuracy: 0.9980
3/11 [=======>......................] - ETA: 0s - loss: 0.4708 - accuracy: 0.9987
4/11 [=========>....................] - ETA: 0s - loss: 0.4550 - accuracy: 0.9980
5/11 [============>.................] - ETA: 0s - loss: 0.4390 - accuracy: 0.9977
6/11 [===============>..............] - ETA: 0s - loss: 0.4217 - accuracy: 0.9980
7/11 [==================>...........] - ETA: 0s - loss: 0.4085 - accuracy: 0.9983
8/11 [====================>.........] - ETA: 0s - loss: 0.3960 - accuracy: 0.9985
9/11 [=======================>......] - ETA: 0s - loss: 0.3821 - accuracy: 0.9983
10/11 [==========================>...] - ETA: 0s - loss: 0.3694 - accuracy: 0.9980
11/11 [==============================] - 1s 108ms/step - loss: 0.3611 - accuracy: 0.9982 - val_loss: 0.5030 - val_accuracy: 0.9398
Epoch 5/5
1/11 [=>............................] - ETA: 0s - loss: 0.2055 - accuracy: 1.0000
2/11 [====>.........................] - ETA: 0s - loss: 0.1939 - accuracy: 1.0000
3/11 [=======>......................] - ETA: 0s - loss: 0.1853 - accuracy: 1.0000
4/11 [=========>....................] - ETA: 0s - loss: 0.1760 - accuracy: 0.9990
6/11 [===============>..............] - ETA: 0s - loss: 0.1622 - accuracy: 0.9987
7/11 [==================>...........] - ETA: 0s - loss: 0.1556 - accuracy: 0.9989
9/11 [=======================>......] - ETA: 0s - loss: 0.1455 - accuracy: 0.9991
10/11 [==========================>...] - ETA: 0s - loss: 0.1422 - accuracy: 0.9988
11/11 [==============================] - 1s 84ms/step - loss: 0.1389 - accuracy: 0.9989 - val_loss: 0.3482 - val_accuracy: 0.9459
Evaluate Model Performance
# Evaluate: argmax over the softmax outputs yields the predicted class id.
from sklearn.metrics import accuracy_score, classification_report
train_preds = model.predict(X_train)
test_preds = model.predict(X_test)
print("Train Accuracy : {:.3f}".format(accuracy_score(Y_train, np.argmax(train_preds, axis=1))))
print("Test Accuracy : {:.3f}".format(accuracy_score(Y_test, np.argmax(test_preds, axis=1))))
print("\nClassification Report : ")
print(classification_report(Y_test, np.argmax(test_preds, axis=1), target_names=selected_categories))
1/85 [..............................] - ETA: 9s
10/85 [==>...........................] - ETA: 0s
17/85 [=====>........................] - ETA: 0s
25/85 [=======>......................] - ETA: 0s
35/85 [===========>..................] - ETA: 0s
45/85 [==============>...............] - ETA: 0s
55/85 [==================>...........] - ETA: 0s
66/85 [======================>.......] - ETA: 0s
75/85 [=========================>....] - ETA: 0s
81/85 [===========================>..] - ETA: 0s
85/85 [==============================] - 1s 6ms/step
1/57 [..............................] - ETA: 4s
9/57 [===>..........................] - ETA: 0s
18/57 [========>.....................] - ETA: 0s
26/57 [============>.................] - ETA: 0s
38/57 [===================>..........] - ETA: 0s
52/57 [==========================>...] - ETA: 0s
57/57 [==============================] - 0s 5ms/step
Train Accuracy : 1.000
Test Accuracy : 0.946
Classification Report :
precision recall f1-score support
alt.atheism 0.96 0.93 0.95 319
comp.graphics 0.94 0.95 0.95 389
rec.motorcycles 0.98 0.99 0.98 398
sci.space 0.91 0.94 0.92 394
talk.politics.misc 0.95 0.90 0.92 310
accuracy 0.95 1810
macro avg 0.95 0.94 0.94 1810
weighted avg 0.95 0.95 0.95 1810
# One-hot-encode classes: the confusion-matrix explainer compares per-class
# score arrays, so the integer labels are expanded to indicator rows.
oh_Y_test = np.eye(len(classes))[Y_test]
cm = metrics_explainer['confusionmatrix'](oh_Y_test, test_preds, selected_categories)
cm.visualize()
print(cm.report)
precision recall f1-score support
alt.atheism 0.96 0.93 0.95 319
comp.graphics 0.94 0.95 0.95 389
rec.motorcycles 0.98 0.99 0.98 398
sci.space 0.91 0.94 0.92 394
talk.politics.misc 0.95 0.90 0.92 310
accuracy 0.95 1810
macro avg 0.95 0.94 0.94 1810
weighted avg 0.95 0.95 0.95 1810
# Per-class precision-recall and ROC curves via the metrics explainer.
plotter = metrics_explainer['plot'](oh_Y_test, test_preds, selected_categories)
plotter.pr_curve()
plotter.roc_curve()
SHAP Partition Explainer
import shap
def make_predictions(X_batch_text):
    """Vectorize a batch of raw texts and return the model's class probabilities.

    Prediction function handed to the SHAP explainer: it must accept raw
    strings, so the TF-IDF transform happens inside.
    """
    features = vectorizer.transform(X_batch_text).toarray()
    return model.predict(features)
# Token-level masker: splits text on non-word characters, matching the
# model's bag-of-words view of the input.
masker = shap.maskers.Text(tokenizer=r"\W+")
explainer = shap.Explainer(make_predictions, masker=masker, output_names=selected_categories)
explainer
<shap.explainers._partition.Partition at 0x7f1d185d2940>
Visualize SHAP Values Correct Predictions
# Explain two test samples: show their tokenization (same \W+ split as the
# masker), record model predictions, then compute SHAP values.
import re
X_batch_text = X_test_text[1:3]
X_batch = X_test[1:3]
print("Samples : ")
for text in X_batch_text:
    print(re.split(r"\W+", text))
    print()
preds_proba = model.predict(X_batch)
preds = preds_proba.argmax(axis=1)
# print("Actual Target Values : {}".format([selected_categories[target] for target in Y_test[1:3]]))
# print("Predicted Target Values : {}".format([selected_categories[target] for target in preds]))
# print("Predicted Probabilities : {}".format(preds_proba.max(axis=1)))
shap_values = explainer(X_batch_text)
Samples :
['From', 'prb', 'access', 'digex', 'net', 'Pat', 'Subject', 'Re', 'Near', 'Miss', 'Asteroids', 'Q', 'Organization', 'Express', 'Access', 'Online', 'Communications', 'Greenbelt', 'MD', 'USA', 'Lines', '4', 'Distribution', 'sci', 'NNTP', 'Posting', 'Host', 'access', 'digex', 'net', 'TRry', 'the', 'SKywatch', 'project', 'in', 'Arizona', 'pat', '']
['From', 'cobb', 'alexia', 'lis', 'uiuc', 'edu', 'Mike', 'Cobb', 'Subject', 'Science', 'and', 'theories', 'Organization', 'University', 'of', 'Illinois', 'at', 'Urbana', 'Lines', '19', 'As', 'per', 'various', 'threads', 'on', 'science', 'and', 'creationism', 'I', 've', 'started', 'dabbling', 'into', 'a', 'book', 'called', 'Christianity', 'and', 'the', 'Nature', 'of', 'Science', 'by', 'JP', 'Moreland', 'A', 'question', 'that', 'I', 'had', 'come', 'from', 'one', 'of', 'his', 'comments', 'He', 'stated', 'that', 'God', 'is', 'not', 'necessarily', 'a', 'religious', 'term', 'but', 'could', 'be', 'used', 'as', 'other', 'scientific', 'terms', 'that', 'give', 'explanation', 'for', 'events', 'or', 'theories', 'without', 'being', 'a', 'proven', 'scientific', 'fact', 'I', 'think', 'I', 'got', 'his', 'point', 'I', 'can', 'quote', 'the', 'section', 'if', 'I', 'm', 'being', 'vague', 'The', 'examples', 'he', 'gave', 'were', 'quarks', 'and', 'continental', 'plates', 'Are', 'there', 'explanations', 'of', 'science', 'or', 'parts', 'of', 'theories', 'that', 'are', 'not', 'measurable', 'in', 'and', 'of', 'themselves', 'or', 'can', 'everything', 'be', 'quantified', 'measured', 'tested', 'etc', 'MAC', 'Michael', 'A', 'Cobb', 'and', 'I', 'won', 't', 'raise', 'taxes', 'on', 'the', 'middle', 'University', 'of', 'Illinois', 'class', 'to', 'pay', 'for', 'my', 'programs', 'Champaign', 'Urbana', 'Bill', 'Clinton', '3rd', 'Debate', 'cobb', 'alexia', 'lis', 'uiuc', 'edu', 'Nobody', 'can', 'explain', 'everything', 'to', 'anybody', 'G', 'K', 'Chesterton', '']
1/1 [==============================] - ETA: 0s
1/1 [==============================] - 0s 37ms/step
1/1 [==============================] - ETA: 0s
1/1 [==============================] - 0s 33ms/step
The module `llvmlite.llvmpy` is deprecated and will be removed in the future.
The module `llvmlite.llvmpy.core` is deprecated and will be removed in the future. Equivalent functionality is provided by `llvmlite.ir`.
dpctl could not find any non-host SYCL device on the system. A non-host SYCL device is required to use numba_dpex.
Numba extension module 'numba_dpex.numpy_usm_shared' failed to load due to 'ImportError(No non-host SYCL device found to execute kernels.)'.
1/1 [==============================] - ETA: 0s
1/1 [==============================] - 0s 32ms/step
1/1 [==============================] - ETA: 0s
1/1 [==============================] - 0s 32ms/step
1/1 [==============================] - ETA: 0s
1/1 [==============================] - 0s 34ms/step
1/1 [==============================] - ETA: 0s
1/1 [==============================] - 0s 31ms/step
1/1 [==============================] - ETA: 0s
1/1 [==============================] - 0s 32ms/step
1/1 [==============================] - ETA: 0s
1/1 [==============================] - 0s 49ms/step
1/1 [==============================] - ETA: 0s
1/1 [==============================] - 0s 54ms/step
1/1 [==============================] - ETA: 0s
1/1 [==============================] - 0s 101ms/step
1/1 [==============================] - ETA: 0s
1/1 [==============================] - 0s 101ms/step
1/1 [==============================] - ETA: 0s
1/1 [==============================] - 0s 79ms/step
1/1 [==============================] - ETA: 0s
1/1 [==============================] - 0s 54ms/step
1/1 [==============================] - ETA: 0s
1/1 [==============================] - 0s 56ms/step
1/1 [==============================] - ETA: 0s
1/1 [==============================] - 0s 61ms/step
1/1 [==============================] - ETA: 0s
1/1 [==============================] - 0s 64ms/step
1/1 [==============================] - ETA: 0s
1/1 [==============================] - 0s 136ms/step
1/1 [==============================] - ETA: 0s
1/1 [==============================] - 0s 64ms/step
1/1 [==============================] - ETA: 0s
1/1 [==============================] - 0s 70ms/step
1/1 [==============================] - ETA: 0s
1/1 [==============================] - 0s 64ms/step
1/1 [==============================] - ETA: 0s
1/1 [==============================] - 0s 64ms/step
1/1 [==============================] - ETA: 0s
1/1 [==============================] - 0s 65ms/step
1/1 [==============================] - ETA: 0s
1/1 [==============================] - 0s 110ms/step
1/1 [==============================] - ETA: 0s
1/1 [==============================] - 0s 66ms/step
1/1 [==============================] - ETA: 0s
1/1 [==============================] - 0s 67ms/step
1/1 [==============================] - ETA: 0s
1/1 [==============================] - 0s 65ms/step
1/1 [==============================] - ETA: 0s
1/1 [==============================] - 0s 143ms/step
1/1 [==============================] - ETA: 0s
1/1 [==============================] - 0s 118ms/step
1/1 [==============================] - ETA: 0s
1/1 [==============================] - 0s 66ms/step
1/1 [==============================] - ETA: 0s
1/1 [==============================] - 0s 81ms/step
1/1 [==============================] - ETA: 0s
1/1 [==============================] - 0s 85ms/step
1/1 [==============================] - ETA: 0s
1/1 [==============================] - 0s 66ms/step
1/1 [==============================] - ETA: 0s
1/1 [==============================] - 0s 65ms/step
1/1 [==============================] - ETA: 0s
1/1 [==============================] - 0s 66ms/step
1/1 [==============================] - ETA: 0s
1/1 [==============================] - 0s 67ms/step
1/1 [==============================] - ETA: 0s
1/1 [==============================] - 0s 115ms/step
1/1 [==============================] - ETA: 0s
1/1 [==============================] - 0s 65ms/step
1/1 [==============================] - ETA: 0s
1/1 [==============================] - 0s 78ms/step
1/1 [==============================] - ETA: 0s
1/1 [==============================] - 0s 65ms/step
1/1 [==============================] - ETA: 0s
1/1 [==============================] - 0s 135ms/step
1/1 [==============================] - ETA: 0s
1/1 [==============================] - 0s 65ms/step
1/1 [==============================] - ETA: 0s
1/1 [==============================] - 0s 66ms/step
1/1 [==============================] - ETA: 0s
1/1 [==============================] - 0s 66ms/step
1/1 [==============================] - ETA: 0s
1/1 [==============================] - 0s 66ms/step
1/1 [==============================] - ETA: 0s
1/1 [==============================] - 0s 65ms/step
1/1 [==============================] - ETA: 0s
1/1 [==============================] - 0s 65ms/step
1/1 [==============================] - ETA: 0s
1/1 [==============================] - 0s 70ms/step
1/1 [==============================] - ETA: 0s
1/1 [==============================] - 0s 157ms/step
1/1 [==============================] - ETA: 0s
1/1 [==============================] - 0s 82ms/step
1/1 [==============================] - ETA: 0s
1/1 [==============================] - 0s 64ms/step
1/1 [==============================] - ETA: 0s
1/1 [==============================] - 0s 63ms/step
1/1 [==============================] - ETA: 0s
1/1 [==============================] - 0s 71ms/step
1/1 [==============================] - ETA: 0s
1/1 [==============================] - 0s 68ms/step
1/1 [==============================] - ETA: 0s
1/1 [==============================] - 0s 73ms/step
1/1 [==============================] - ETA: 0s
1/1 [==============================] - 0s 65ms/step
1/1 [==============================] - ETA: 0s
1/1 [==============================] - 0s 65ms/step
1/1 [==============================] - ETA: 0s
1/1 [==============================] - 0s 65ms/step
1/1 [==============================] - ETA: 0s
1/1 [==============================] - 0s 64ms/step
1/1 [==============================] - ETA: 0s
1/1 [==============================] - 0s 82ms/step
1/1 [==============================] - ETA: 0s
1/1 [==============================] - 0s 85ms/step
1/1 [==============================] - ETA: 0s
1/1 [==============================] - 0s 115ms/step
1/1 [==============================] - ETA: 0s
1/1 [==============================] - 0s 83ms/step
1/1 [==============================] - ETA: 0s
1/1 [==============================] - 0s 66ms/step
1/1 [==============================] - ETA: 0s
1/1 [==============================] - 0s 136ms/step
1/1 [==============================] - ETA: 0s
1/1 [==============================] - 0s 233ms/step
1/1 [==============================] - ETA: 0s
1/1 [==============================] - 0s 137ms/step
1/1 [==============================] - ETA: 0s
1/1 [==============================] - 0s 64ms/step
1/1 [==============================] - ETA: 0s
1/1 [==============================] - 0s 64ms/step
1/1 [==============================] - ETA: 0s
1/1 [==============================] - 0s 86ms/step
1/1 [==============================] - ETA: 0s
1/1 [==============================] - 0s 64ms/step
1/1 [==============================] - ETA: 0s
1/1 [==============================] - 0s 64ms/step
1/1 [==============================] - ETA: 0s
1/1 [==============================] - 0s 65ms/step
1/1 [==============================] - ETA: 0s
1/1 [==============================] - 0s 65ms/step
1/1 [==============================] - ETA: 0s
1/1 [==============================] - 0s 77ms/step
1/1 [==============================] - ETA: 0s
1/1 [==============================] - 0s 65ms/step
1/1 [==============================] - ETA: 0s
1/1 [==============================] - 0s 179ms/step
1/1 [==============================] - ETA: 0s
1/1 [==============================] - 0s 74ms/step
1/1 [==============================] - ETA: 0s
1/1 [==============================] - 0s 83ms/step
1/1 [==============================] - ETA: 0s
1/1 [==============================] - 0s 65ms/step
1/1 [==============================] - ETA: 0s
1/1 [==============================] - 0s 65ms/step
1/1 [==============================] - ETA: 0s
1/1 [==============================] - 0s 64ms/step
1/1 [==============================] - ETA: 0s
1/1 [==============================] - 0s 64ms/step
1/1 [==============================] - ETA: 0s
1/1 [==============================] - 0s 127ms/step
1/1 [==============================] - ETA: 0s
1/1 [==============================] - 0s 69ms/step
1/1 [==============================] - ETA: 0s
1/1 [==============================] - 0s 83ms/step
1/1 [==============================] - ETA: 0s
1/1 [==============================] - 0s 67ms/step
1/1 [==============================] - ETA: 0s
1/1 [==============================] - 0s 138ms/step
Partition explainer: 50%|███████████████████████████████████████▌ | 1/2 [00:00<?, ?it/s]
1/1 [==============================] - ETA: 0s
1/1 [==============================] - 0s 72ms/step
1/1 [==============================] - ETA: 0s
1/1 [==============================] - 0s 134ms/step
1/1 [==============================] - ETA: 0s
1/1 [==============================] - 0s 135ms/step
1/1 [==============================] - ETA: 0s
1/1 [==============================] - 0s 42ms/step
1/1 [==============================] - ETA: 0s
1/1 [==============================] - 0s 31ms/step
1/1 [==============================] - ETA: 0s
1/1 [==============================] - 0s 126ms/step
1/1 [==============================] - ETA: 0s
1/1 [==============================] - 0s 94ms/step
1/1 [==============================] - ETA: 0s
1/1 [==============================] - 0s 34ms/step
1/1 [==============================] - ETA: 0s
1/1 [==============================] - 0s 55ms/step
1/1 [==============================] - ETA: 0s
1/1 [==============================] - 0s 53ms/step
1/1 [==============================] - ETA: 0s
1/1 [==============================] - 0s 55ms/step
1/1 [==============================] - ETA: 0s
1/1 [==============================] - 0s 71ms/step
1/1 [==============================] - ETA: 0s
1/1 [==============================] - 0s 55ms/step
1/1 [==============================] - ETA: 0s
1/1 [==============================] - 0s 56ms/step
1/1 [==============================] - ETA: 0s
1/1 [==============================] - 0s 129ms/step
1/1 [==============================] - ETA: 0s
1/1 [==============================] - 0s 63ms/step
1/1 [==============================] - ETA: 0s
1/1 [==============================] - 0s 66ms/step
1/1 [==============================] - ETA: 0s
1/1 [==============================] - 0s 67ms/step
1/1 [==============================] - ETA: 0s
1/1 [==============================] - 0s 111ms/step
1/1 [==============================] - ETA: 0s
1/1 [==============================] - 0s 81ms/step
1/1 [==============================] - ETA: 0s
1/1 [==============================] - 0s 82ms/step
1/1 [==============================] - ETA: 0s
1/1 [==============================] - 0s 118ms/step
1/1 [==============================] - ETA: 0s
1/1 [==============================] - 0s 82ms/step
1/1 [==============================] - ETA: 0s
1/1 [==============================] - 0s 64ms/step
1/1 [==============================] - ETA: 0s
1/1 [==============================] - 0s 66ms/step
1/1 [==============================] - ETA: 0s
1/1 [==============================] - 0s 179ms/step
1/1 [==============================] - ETA: 0s
1/1 [==============================] - 0s 70ms/step
1/1 [==============================] - ETA: 0s
1/1 [==============================] - 0s 66ms/step
1/1 [==============================] - ETA: 0s
1/1 [==============================] - 0s 71ms/step
1/1 [==============================] - ETA: 0s
1/1 [==============================] - 0s 61ms/step
1/1 [==============================] - ETA: 0s
1/1 [==============================] - 0s 135ms/step
1/1 [==============================] - ETA: 0s
1/1 [==============================] - 0s 63ms/step
1/1 [==============================] - ETA: 0s
1/1 [==============================] - 0s 67ms/step
1/1 [==============================] - ETA: 0s
1/1 [==============================] - 0s 135ms/step
1/1 [==============================] - ETA: 0s
1/1 [==============================] - 0s 65ms/step
1/1 [==============================] - ETA: 0s
1/1 [==============================] - 0s 121ms/step
1/1 [==============================] - ETA: 0s
1/1 [==============================] - 0s 111ms/step
1/1 [==============================] - ETA: 0s
1/1 [==============================] - 0s 64ms/step
1/1 [==============================] - ETA: 0s
1/1 [==============================] - 0s 65ms/step
1/1 [==============================] - ETA: 0s
1/1 [==============================] - 0s 66ms/step
1/1 [==============================] - ETA: 0s
1/1 [==============================] - 0s 63ms/step
1/1 [==============================] - ETA: 0s
1/1 [==============================] - 0s 62ms/step
1/1 [==============================] - ETA: 0s
1/1 [==============================] - 0s 143ms/step
1/1 [==============================] - ETA: 0s
1/1 [==============================] - 0s 63ms/step
1/1 [==============================] - ETA: 0s
1/1 [==============================] - 0s 63ms/step
1/1 [==============================] - ETA: 0s
1/1 [==============================] - 0s 66ms/step
1/1 [==============================] - ETA: 0s
1/1 [==============================] - 0s 85ms/step
1/1 [==============================] - ETA: 0s
1/1 [==============================] - 0s 69ms/step
1/1 [==============================] - ETA: 0s
1/1 [==============================] - 0s 138ms/step
1/1 [==============================] - ETA: 0s
1/1 [==============================] - 0s 64ms/step
1/1 [==============================] - ETA: 0s
1/1 [==============================] - 0s 135ms/step
1/1 [==============================] - ETA: 0s
1/1 [==============================] - 0s 64ms/step
1/1 [==============================] - ETA: 0s
1/1 [==============================] - 0s 194ms/step
1/1 [==============================] - ETA: 0s
1/1 [==============================] - 0s 63ms/step
1/1 [==============================] - ETA: 0s
1/1 [==============================] - 0s 64ms/step
1/1 [==============================] - ETA: 0s
1/1 [==============================] - 0s 63ms/step
1/1 [==============================] - ETA: 0s
1/1 [==============================] - 0s 110ms/step
1/1 [==============================] - ETA: 0s
1/1 [==============================] - 0s 63ms/step
1/1 [==============================] - ETA: 0s
1/1 [==============================] - 0s 82ms/step
1/1 [==============================] - ETA: 0s
1/1 [==============================] - 0s 62ms/step
1/1 [==============================] - ETA: 0s
1/1 [==============================] - 0s 64ms/step
1/1 [==============================] - ETA: 0s
1/1 [==============================] - 0s 112ms/step
1/1 [==============================] - ETA: 0s
1/1 [==============================] - 0s 62ms/step
1/1 [==============================] - ETA: 0s
1/1 [==============================] - 0s 66ms/step
1/1 [==============================] - ETA: 0s
1/1 [==============================] - 0s 62ms/step
1/1 [==============================] - ETA: 0s
1/1 [==============================] - 0s 63ms/step
1/1 [==============================] - ETA: 0s
1/1 [==============================] - 0s 82ms/step
1/1 [==============================] - ETA: 0s
1/1 [==============================] - 0s 64ms/step
1/1 [==============================] - ETA: 0s
1/1 [==============================] - 0s 63ms/step
1/1 [==============================] - ETA: 0s
1/1 [==============================] - 0s 189ms/step
1/1 [==============================] - ETA: 0s
1/1 [==============================] - 0s 64ms/step
1/1 [==============================] - ETA: 0s
1/1 [==============================] - 0s 64ms/step
1/1 [==============================] - ETA: 0s
1/1 [==============================] - 0s 136ms/step
1/1 [==============================] - ETA: 0s
1/1 [==============================] - 0s 62ms/step
1/1 [==============================] - ETA: 0s
1/1 [==============================] - 0s 62ms/step
1/1 [==============================] - ETA: 0s
1/1 [==============================] - 0s 136ms/step
1/1 [==============================] - ETA: 0s
1/1 [==============================] - 0s 64ms/step
1/1 [==============================] - ETA: 0s
1/1 [==============================] - 0s 64ms/step
1/1 [==============================] - ETA: 0s
1/1 [==============================] - 0s 82ms/step
1/1 [==============================] - ETA: 0s
1/1 [==============================] - 0s 135ms/step
1/1 [==============================] - ETA: 0s
1/1 [==============================] - 0s 80ms/step
1/1 [==============================] - ETA: 0s
1/1 [==============================] - 0s 136ms/step
1/1 [==============================] - ETA: 0s
1/1 [==============================] - 0s 63ms/step
1/1 [==============================] - ETA: 0s
1/1 [==============================] - 0s 82ms/step
1/1 [==============================] - ETA: 0s
1/1 [==============================] - 0s 81ms/step
1/1 [==============================] - ETA: 0s
1/1 [==============================] - 0s 83ms/step
Partition explainer: 3it [00:45, 9.56s/it]
Partition explainer: 3it [00:45, 22.81s/it]
Text Plot
shap.text_plot(shap_values)
Bar Plots
Bar Plot 1
# Mean |SHAP| per token across the batch, for sample 0's predicted class.
shap.plots.bar(shap_values[:,:, selected_categories[preds[0]]].mean(axis=0), max_display=15,
               order=shap.Explanation.argsort.flip)
Creating an ndarray from ragged nested sequences (which is a list-or-tuple of lists-or-tuples-or ndarrays with different lengths or shapes) is deprecated. If you meant to do this, you must specify 'dtype=object' when creating the ndarray.
Bar Plot 2
# Token contributions for sample 0 alone, toward its predicted class.
shap.plots.bar(shap_values[0,:, selected_categories[preds[0]]], max_display=15,
               order=shap.Explanation.argsort.flip)
Bar Plot 3
# Mean |SHAP| per token across the batch, for sample 1's predicted class.
shap.plots.bar(shap_values[:,:, selected_categories[preds[1]]].mean(axis=0), max_display=15,
               order=shap.Explanation.argsort.flip)
Bar Plot 4
# Token contributions for sample 1 alone, toward its predicted class.
shap.plots.bar(shap_values[1,:, selected_categories[preds[1]]], max_display=15,
               order=shap.Explanation.argsort.flip)
Waterfall Plots
Waterfall Plot 1
shap.waterfall_plot(shap_values[0][:, selected_categories[preds[0]]], max_display=15)
Waterfall Plot 2
shap.waterfall_plot(shap_values[1][:, selected_categories[preds[1]]], max_display=15)
Force Plot
# Interactive force plot of sample 0's prediction for its predicted class.
import re

# Tokenize the same way the SHAP text masker does (split on non-word chars).
# FIX: use a raw string — "\W" in a plain string literal is an invalid
# escape sequence (SyntaxWarning on modern Python, slated to become an
# error), and this now matches the r"\W+" used for the masker above.
tokens = re.split(r"\W+", X_batch_text[0].lower())
shap.initjs()
# tokens[:-1] drops the trailing empty token that re.split produces when the
# text ends with a delimiter, keeping feature_names aligned with the values.
shap.force_plot(shap_values.base_values[0][preds[0]], shap_values[0][:, preds[0]].values,
                feature_names=tokens[:-1], out_names=selected_categories[preds[0]])
Visualization omitted, Javascript library not loaded!
Have you run `initjs()` in this notebook? If this notebook was from another user you must also trust this notebook (File -> Trust notebook). If you are viewing this notebook on github the Javascript has been stripped for security. If you are using JupyterLab this error is because a JupyterLab extension has not yet been written.
Have you run `initjs()` in this notebook? If this notebook was from another user you must also trust this notebook (File -> Trust notebook). If you are viewing this notebook on github the Javascript has been stripped for security. If you are using JupyterLab this error is because a JupyterLab extension has not yet been written.